This is a project I did at the Artificial Intelligence and Deep Learning Center of Iran: an MLP classifier running on the Fashion-MNIST dataset.
I downloaded the Fashion-MNIST dataset manually; it contains 4 files: test images, test labels, train images, and train labels.
Using idx2numpy to open the IDX files; the output is a numpy array.
import numpy as np
import idx2numpy

# Load the Fashion-MNIST IDX files into numpy arrays.
# Presumably images are uint8 (N, 28, 28) and labels uint8 (N,) — the
# reshape(…, 784) done later in this script is consistent with that.
pre_X_train = idx2numpy.convert_from_file('D:/Internship/train-images-idx3-ubyte')
pre_Y_train = idx2numpy.convert_from_file('D:/Internship/train-labels-idx1-ubyte')
# Fixed path typo: 'D:/AInternship' -> 'D:/Internship' to match the other files.
pre_X_test = idx2numpy.convert_from_file('D:/Internship/t10k-images-idx3-ubyte')
pre_Y_test = idx2numpy.convert_from_file('D:/Internship/t10k-labels-idx1-ubyte')
Below is a commented-out example of K-fold cross-validation (kept for reference, not used in this project).
'''
Example of K-fold cross-validation (kept for reference, never executed):

from sklearn.model_selection import KFold      # fixed typo: model_selction
X = np.array([[1, 2], [3, 4], [1, 2], [3, 4]])  # rows must be inside one list
Y = np.array([1, 2, 3, 4])
kf = KFold(n_splits=2)                          # fixed typo: n_splite
for train_index, test_index in kf.split(X):
    X_train, X_test = X[train_index], X[test_index]
    Y_train, Y_test = Y[train_index], Y[test_index]
'''
Using matplotlib to show 80 samples of the train data set.
import matplotlib.pyplot as plt

# Fashion-MNIST class names, indexed by label 0-9.
FASHION_LABELS = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
                  'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']

# 80 random training indices. np.random.randint's `high` is exclusive,
# so 60000 is needed to make index 59999 reachable (59999 excluded it).
rand = np.random.randint(0, 60000, 80)
plt.figure(figsize=(12, 15))
for i in range(80):
    sample_idx = rand[i]
    plt.subplot(10, 10, i + 1)
    plt.imshow(pre_X_train[sample_idx], cmap='binary')
    plt.axis('off')
    # Table lookup replaces the original 10-branch if/elif chain.
    plt.title(FASHION_LABELS[pre_Y_train[sample_idx]], fontsize=12)
plt.show()  # original 'plt.show' (no parentheses) was a no-op
Using matplotlib to show 80 samples of the test data.
import matplotlib.pyplot as plt

# Fashion-MNIST class names, indexed by label 0-9.
FASHION_LABELS = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
                  'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']

# 80 random test indices. `high` is exclusive in np.random.randint,
# so 10000 is needed to make index 9999 reachable (9999 excluded it).
rand = np.random.randint(0, 10000, 80)
plt.figure(figsize=(12, 15))
for i in range(80):
    sample_idx = rand[i]
    plt.subplot(10, 10, i + 1)
    plt.imshow(pre_X_test[sample_idx], cmap='binary')
    plt.axis('off')
    # Table lookup replaces the original 10-branch if/elif chain.
    plt.title(FASHION_LABELS[pre_Y_test[sample_idx]], fontsize=12)
plt.show()  # original 'plt.show' (no parentheses) was a no-op
Since the elements of the numpy arrays are between 0 and 255, I divide them by 255 to normalize.
# Flatten each 28x28 image to a 784-vector and scale pixels to [0, 1].
# len(...) replaces the hard-coded 60000/10000 sample counts.
X_train = pre_X_train.reshape(len(pre_X_train), 784).astype('float32') / 255
X_test = pre_X_test.reshape(len(pre_X_test), 784).astype('float32') / 255

# One-hot encode the integer labels (shape (N,) -> (N, 10)).
# keras.utils.to_categorical is importable directly in both old and new
# Keras; the np_utils module was removed from modern releases.
from keras.utils import to_categorical
Y_train = to_categorical(pre_Y_train)
Y_test = to_categorical(pre_Y_test)
Define the model. The number of neurons in every layer will change to observe the effect. First layer = 200, second = 50, third = 10.
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
from keras.losses import categorical_crossentropy

# MLP: 784 inputs -> 200 (relu) -> 50 (relu) -> 10 (softmax).
MLP_Model = Sequential()
MLP_Model.add(Dense(200, activation='relu', input_shape=(784,)))
MLP_Model.add(Dense(50, activation='relu'))
MLP_Model.add(Dense(10, activation='softmax'))
MLP_Model.summary()
# 'learning_rate' replaces the deprecated 'lr' keyword argument.
MLP_Model.compile(optimizer=SGD(learning_rate=0.001),
                  loss=categorical_crossentropy,
                  metrics=['accuracy'])
Fit X_train and Y_train using MLP_Model.fit() with epochs=25, using the history to show loss, val_loss, accuracy and val_accuracy of each epoch.
# Train for 25 epochs, holding out 20% of the training data for validation.
network_history = MLP_Model.fit(
    X_train, Y_train, batch_size=128, epochs=25, validation_split=0.2
)
history = network_history.history

import matplotlib.pyplot as plt

# Pull the per-epoch metric curves out of the history dict.
losses, val_losses, accuracies, val_accuracies = (
    history['loss'],
    history['val_loss'],
    history['accuracy'],
    history['val_accuracy'],
)
Show that loss and val_loss decrease with each epoch.
# Training vs. validation loss per epoch.
plt.plot(losses)
plt.plot(val_losses)
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend(['loss', 'val_loss'])
Show that accuracy and val_accuracy increase with each epoch.
# Training vs. validation accuracy per epoch.
plt.plot(accuracies)
plt.plot(val_accuracies)
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend(['acc', 'val_acc'])
run the model on test data and evaluate the loss and accuracy
# Test-set loss/accuracy, then hard class predictions via argmax over the
# softmax probabilities.
test_loss, test_acc = MLP_Model.evaluate(X_test, Y_test)
Y_test_predict = MLP_Model.predict(X_test).argmax(axis=1)
show 80 samples of predicted data.
import matplotlib.pyplot as plt

# Fashion-MNIST class names, indexed by label 0-9.
FASHION_LABELS = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
                  'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']

# 80 random test indices; `high` is exclusive, so 10000 covers index 9999
# too (the original 9999 bound could never select the last test image).
rand = np.random.randint(0, 10000, 80)
plt.figure(figsize=(12, 15))
for i in range(80):
    sample_idx = rand[i]
    plt.subplot(10, 10, i + 1)
    plt.imshow(pre_X_test[sample_idx], cmap='binary')
    plt.axis('off')
    # Title is the PREDICTED class; table lookup replaces the if/elif chain.
    plt.title(FASHION_LABELS[Y_test_predict[sample_idx]], fontsize=12)
plt.show()  # original 'plt.show' (no parentheses) was a no-op
calculation of confusion matrix
from sklearn.metrics import confusion_matrix

# Print the 10x10 confusion matrix (rows: true labels, cols: predictions).
# The bare expression's result was silently discarded when run as a script.
print(confusion_matrix(pre_Y_test, Y_test_predict))
# Experiment: widen the first layer (200 -> 1000) and second (50 -> 100);
# batch size 128, 25 epochs. Observe the effect on loss/accuracy curves.
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
from keras.losses import categorical_crossentropy
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt

# MLP: 784 -> 1000 (relu) -> 100 (relu) -> 10 (softmax).
MLP_Model = Sequential()
MLP_Model.add(Dense(1000, activation='relu', input_shape=(784,)))
MLP_Model.add(Dense(100, activation='relu'))
MLP_Model.add(Dense(10, activation='softmax'))
MLP_Model.summary()
# 'learning_rate' replaces the deprecated 'lr' keyword.
MLP_Model.compile(optimizer=SGD(learning_rate=0.001),
                  loss=categorical_crossentropy,
                  metrics=['accuracy'])

# Train, keeping 20% of the training data for validation.
network_history = MLP_Model.fit(X_train, Y_train, batch_size=128, epochs=25,
                                validation_split=0.2)
history = network_history.history
losses = history['loss']
val_losses = history['val_loss']
accuracies = history['accuracy']
val_accuracies = history['val_accuracy']

# Loss curves (training vs. validation) per epoch.
plt.figure()
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.plot(losses)
plt.plot(val_losses)
plt.legend(['loss', 'val_loss'])
plt.show()

# Accuracy curves (training vs. validation) per epoch.
plt.figure()
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.plot(accuracies)
plt.plot(val_accuracies)
plt.legend(['acc', 'val_acc'])
plt.show()

# Evaluate on the test set; hard predictions via argmax over the softmax.
test_loss, test_acc = MLP_Model.evaluate(X_test, Y_test)
Y_test_predict = np.argmax(MLP_Model.predict(X_test), axis=1)

# Show 80 random test images titled with the predicted class.
FASHION_LABELS = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
                  'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']
rand = np.random.randint(0, 10000, 80)  # high is exclusive: index 9999 now reachable
plt.figure(figsize=(12, 15))
for i in range(80):
    idx = rand[i]
    plt.subplot(10, 10, i + 1)
    plt.imshow(pre_X_test[idx], cmap='binary')
    plt.axis('off')
    plt.title(FASHION_LABELS[Y_test_predict[idx]], fontsize=12)
plt.show()  # 'plt.show' without parentheses was a no-op

# Confusion matrix: rows = true labels, columns = predicted labels.
print(confusion_matrix(pre_Y_test, Y_test_predict))
# Experiment: 500/250 hidden units; batch size 128, 25 epochs.
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
from keras.losses import categorical_crossentropy
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt

# MLP: 784 -> 500 (relu) -> 250 (relu) -> 10 (softmax).
MLP_Model = Sequential()
MLP_Model.add(Dense(500, activation='relu', input_shape=(784,)))
MLP_Model.add(Dense(250, activation='relu'))
MLP_Model.add(Dense(10, activation='softmax'))
MLP_Model.summary()
# 'learning_rate' replaces the deprecated 'lr' keyword.
MLP_Model.compile(optimizer=SGD(learning_rate=0.001),
                  loss=categorical_crossentropy,
                  metrics=['accuracy'])

# Train, keeping 20% of the training data for validation.
network_history = MLP_Model.fit(X_train, Y_train, batch_size=128, epochs=25,
                                validation_split=0.2)
history = network_history.history
losses = history['loss']
val_losses = history['val_loss']
accuracies = history['accuracy']
val_accuracies = history['val_accuracy']

# Loss curves (training vs. validation) per epoch.
plt.figure()
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.plot(losses)
plt.plot(val_losses)
plt.legend(['loss', 'val_loss'])
plt.show()

# Accuracy curves (training vs. validation) per epoch.
plt.figure()
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.plot(accuracies)
plt.plot(val_accuracies)
plt.legend(['acc', 'val_acc'])
plt.show()

# Evaluate on the test set; hard predictions via argmax over the softmax.
test_loss, test_acc = MLP_Model.evaluate(X_test, Y_test)
Y_test_predict = np.argmax(MLP_Model.predict(X_test), axis=1)

# Show 80 random test images titled with the predicted class.
FASHION_LABELS = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
                  'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']
rand = np.random.randint(0, 10000, 80)  # high is exclusive: index 9999 now reachable
plt.figure(figsize=(12, 15))
for i in range(80):
    idx = rand[i]
    plt.subplot(10, 10, i + 1)
    plt.imshow(pre_X_test[idx], cmap='binary')
    plt.axis('off')
    plt.title(FASHION_LABELS[Y_test_predict[idx]], fontsize=12)
plt.show()  # 'plt.show' without parentheses was a no-op

# Confusion matrix: rows = true labels, columns = predicted labels.
print(confusion_matrix(pre_Y_test, Y_test_predict))
# Experiment: 700/300 hidden units; batch size 128, 25 epochs.
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
from keras.losses import categorical_crossentropy
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt

# MLP: 784 -> 700 (relu) -> 300 (relu) -> 10 (softmax).
MLP_Model = Sequential()
MLP_Model.add(Dense(700, activation='relu', input_shape=(784,)))
MLP_Model.add(Dense(300, activation='relu'))
MLP_Model.add(Dense(10, activation='softmax'))
MLP_Model.summary()
# 'learning_rate' replaces the deprecated 'lr' keyword.
MLP_Model.compile(optimizer=SGD(learning_rate=0.001),
                  loss=categorical_crossentropy,
                  metrics=['accuracy'])

# Train, keeping 20% of the training data for validation.
network_history = MLP_Model.fit(X_train, Y_train, batch_size=128, epochs=25,
                                validation_split=0.2)
history = network_history.history
losses = history['loss']
val_losses = history['val_loss']
accuracies = history['accuracy']
val_accuracies = history['val_accuracy']

# Loss curves (training vs. validation) per epoch.
plt.figure()
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.plot(losses)
plt.plot(val_losses)
plt.legend(['loss', 'val_loss'])
plt.show()

# Accuracy curves (training vs. validation) per epoch.
plt.figure()
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.plot(accuracies)
plt.plot(val_accuracies)
plt.legend(['acc', 'val_acc'])
plt.show()

# Evaluate on the test set; hard predictions via argmax over the softmax.
test_loss, test_acc = MLP_Model.evaluate(X_test, Y_test)
Y_test_predict = np.argmax(MLP_Model.predict(X_test), axis=1)

# Show 80 random test images titled with the predicted class.
FASHION_LABELS = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
                  'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']
rand = np.random.randint(0, 10000, 80)  # high is exclusive: index 9999 now reachable
plt.figure(figsize=(12, 15))
for i in range(80):
    idx = rand[i]
    plt.subplot(10, 10, i + 1)
    plt.imshow(pre_X_test[idx], cmap='binary')
    plt.axis('off')
    plt.title(FASHION_LABELS[Y_test_predict[idx]], fontsize=12)
plt.show()  # 'plt.show' without parentheses was a no-op

# Confusion matrix: rows = true labels, columns = predicted labels.
print(confusion_matrix(pre_Y_test, Y_test_predict))
# Experiment: 500/100 hidden units; batch size 32 (the varied parameter
# here is actually the batch size, not the neuron counts), 25 epochs.
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
from keras.losses import categorical_crossentropy
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt

# MLP: 784 -> 500 (relu) -> 100 (relu) -> 10 (softmax).
MLP_Model = Sequential()
MLP_Model.add(Dense(500, activation='relu', input_shape=(784,)))
MLP_Model.add(Dense(100, activation='relu'))
MLP_Model.add(Dense(10, activation='softmax'))
MLP_Model.summary()
# 'learning_rate' replaces the deprecated 'lr' keyword.
MLP_Model.compile(optimizer=SGD(learning_rate=0.001),
                  loss=categorical_crossentropy,
                  metrics=['accuracy'])

# Train with batch size 32, keeping 20% of the data for validation.
network_history = MLP_Model.fit(X_train, Y_train, batch_size=32, epochs=25,
                                validation_split=0.2)
history = network_history.history
losses = history['loss']
val_losses = history['val_loss']
accuracies = history['accuracy']
val_accuracies = history['val_accuracy']

# Loss curves (training vs. validation) per epoch.
plt.figure()
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.plot(losses)
plt.plot(val_losses)
plt.legend(['loss', 'val_loss'])
plt.show()

# Accuracy curves (training vs. validation) per epoch.
plt.figure()
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.plot(accuracies)
plt.plot(val_accuracies)
plt.legend(['acc', 'val_acc'])
plt.show()

# Evaluate on the test set; hard predictions via argmax over the softmax.
test_loss, test_acc = MLP_Model.evaluate(X_test, Y_test)
Y_test_predict = np.argmax(MLP_Model.predict(X_test), axis=1)

# Show 80 random test images titled with the predicted class.
FASHION_LABELS = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
                  'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']
rand = np.random.randint(0, 10000, 80)  # high is exclusive: index 9999 now reachable
plt.figure(figsize=(12, 15))
for i in range(80):
    idx = rand[i]
    plt.subplot(10, 10, i + 1)
    plt.imshow(pre_X_test[idx], cmap='binary')
    plt.axis('off')
    plt.title(FASHION_LABELS[Y_test_predict[idx]], fontsize=12)
plt.show()  # 'plt.show' without parentheses was a no-op

# Confusion matrix: rows = true labels, columns = predicted labels.
print(confusion_matrix(pre_Y_test, Y_test_predict))
# Experiment: 500/100 hidden units; batch size 64 (the varied parameter
# relative to the previous run is the batch size), 25 epochs.
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
from keras.losses import categorical_crossentropy
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt

# MLP: 784 -> 500 (relu) -> 100 (relu) -> 10 (softmax).
MLP_Model = Sequential()
MLP_Model.add(Dense(500, activation='relu', input_shape=(784,)))
MLP_Model.add(Dense(100, activation='relu'))
MLP_Model.add(Dense(10, activation='softmax'))
MLP_Model.summary()
# 'learning_rate' replaces the deprecated 'lr' keyword.
MLP_Model.compile(optimizer=SGD(learning_rate=0.001),
                  loss=categorical_crossentropy,
                  metrics=['accuracy'])

# Train with batch size 64, keeping 20% of the data for validation.
network_history = MLP_Model.fit(X_train, Y_train, batch_size=64, epochs=25,
                                validation_split=0.2)
history = network_history.history
losses = history['loss']
val_losses = history['val_loss']
accuracies = history['accuracy']
val_accuracies = history['val_accuracy']

# Loss curves (training vs. validation) per epoch.
plt.figure()
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.plot(losses)
plt.plot(val_losses)
plt.legend(['loss', 'val_loss'])
plt.show()

# Accuracy curves (training vs. validation) per epoch.
plt.figure()
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.plot(accuracies)
plt.plot(val_accuracies)
plt.legend(['acc', 'val_acc'])
plt.show()

# Evaluate on the test set; hard predictions via argmax over the softmax.
test_loss, test_acc = MLP_Model.evaluate(X_test, Y_test)
Y_test_predict = np.argmax(MLP_Model.predict(X_test), axis=1)

# Show 80 random test images titled with the predicted class.
FASHION_LABELS = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
                  'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']
rand = np.random.randint(0, 10000, 80)  # high is exclusive: index 9999 now reachable
plt.figure(figsize=(12, 15))
for i in range(80):
    idx = rand[i]
    plt.subplot(10, 10, i + 1)
    plt.imshow(pre_X_test[idx], cmap='binary')
    plt.axis('off')
    plt.title(FASHION_LABELS[Y_test_predict[idx]], fontsize=12)
plt.show()  # 'plt.show' without parentheses was a no-op

# Confusion matrix: rows = true labels, columns = predicted labels.
print(confusion_matrix(pre_Y_test, Y_test_predict))
# Experiment: 500/100 hidden units; batch size 256 (the varied parameter
# relative to the previous run is the batch size), 25 epochs.
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
from keras.losses import categorical_crossentropy
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt

# MLP: 784 -> 500 (relu) -> 100 (relu) -> 10 (softmax).
MLP_Model = Sequential()
MLP_Model.add(Dense(500, activation='relu', input_shape=(784,)))
MLP_Model.add(Dense(100, activation='relu'))
MLP_Model.add(Dense(10, activation='softmax'))
MLP_Model.summary()
# 'learning_rate' replaces the deprecated 'lr' keyword.
MLP_Model.compile(optimizer=SGD(learning_rate=0.001),
                  loss=categorical_crossentropy,
                  metrics=['accuracy'])

# Train with batch size 256, keeping 20% of the data for validation.
network_history = MLP_Model.fit(X_train, Y_train, batch_size=256, epochs=25,
                                validation_split=0.2)
history = network_history.history
losses = history['loss']
val_losses = history['val_loss']
accuracies = history['accuracy']
val_accuracies = history['val_accuracy']

# Loss curves (training vs. validation) per epoch.
plt.figure()
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.plot(losses)
plt.plot(val_losses)
plt.legend(['loss', 'val_loss'])
plt.show()

# Accuracy curves (training vs. validation) per epoch.
plt.figure()
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.plot(accuracies)
plt.plot(val_accuracies)
plt.legend(['acc', 'val_acc'])
plt.show()

# Evaluate on the test set; hard predictions via argmax over the softmax.
test_loss, test_acc = MLP_Model.evaluate(X_test, Y_test)
Y_test_predict = np.argmax(MLP_Model.predict(X_test), axis=1)

# Show 80 random test images titled with the predicted class.
FASHION_LABELS = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
                  'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']
rand = np.random.randint(0, 10000, 80)  # high is exclusive: index 9999 now reachable
plt.figure(figsize=(12, 15))
for i in range(80):
    idx = rand[i]
    plt.subplot(10, 10, i + 1)
    plt.imshow(pre_X_test[idx], cmap='binary')
    plt.axis('off')
    plt.title(FASHION_LABELS[Y_test_predict[idx]], fontsize=12)
plt.show()  # 'plt.show' without parentheses was a no-op

# Confusion matrix: rows = true labels, columns = predicted labels.
print(confusion_matrix(pre_Y_test, Y_test_predict))
# Experiment: 1000/500 hidden units; batch size 32, 30 epochs.
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
from keras.losses import categorical_crossentropy
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt

# MLP: 784 -> 1000 (relu) -> 500 (relu) -> 10 (softmax).
MLP_Model = Sequential()
MLP_Model.add(Dense(1000, activation='relu', input_shape=(784,)))
MLP_Model.add(Dense(500, activation='relu'))
MLP_Model.add(Dense(10, activation='softmax'))
MLP_Model.summary()
# 'learning_rate' replaces the deprecated 'lr' keyword.
MLP_Model.compile(optimizer=SGD(learning_rate=0.001),
                  loss=categorical_crossentropy,
                  metrics=['accuracy'])

# Train for 30 epochs with batch size 32; 20% validation split.
network_history = MLP_Model.fit(X_train, Y_train, batch_size=32, epochs=30,
                                validation_split=0.2)
history = network_history.history
losses = history['loss']
val_losses = history['val_loss']
accuracies = history['accuracy']
val_accuracies = history['val_accuracy']

# Loss curves (training vs. validation) per epoch.
plt.figure()
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.plot(losses)
plt.plot(val_losses)
plt.legend(['loss', 'val_loss'])
plt.show()

# Accuracy curves (training vs. validation) per epoch.
plt.figure()
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.plot(accuracies)
plt.plot(val_accuracies)
plt.legend(['acc', 'val_acc'])
plt.show()

# Evaluate on the test set; hard predictions via argmax over the softmax.
test_loss, test_acc = MLP_Model.evaluate(X_test, Y_test)
Y_test_predict = np.argmax(MLP_Model.predict(X_test), axis=1)

# Show 80 random test images titled with the predicted class.
FASHION_LABELS = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
                  'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']
rand = np.random.randint(0, 10000, 80)  # high is exclusive: index 9999 now reachable
plt.figure(figsize=(12, 15))
for i in range(80):
    idx = rand[i]
    plt.subplot(10, 10, i + 1)
    plt.imshow(pre_X_test[idx], cmap='binary')
    plt.axis('off')
    plt.title(FASHION_LABELS[Y_test_predict[idx]], fontsize=12)
plt.show()  # 'plt.show' without parentheses was a no-op

# Confusion matrix: rows = true labels, columns = predicted labels.
print(confusion_matrix(pre_Y_test, Y_test_predict))